miχpods: MOM6
Contents
miχpods: MOM6#
bootstrap error on mean, median?
Try daily vs hourly
add TAO χpods
fix EUC max at 110
Questions:
N2 vs N2T
match time intervals
match vertical resolution and extent
restrict TAO S2 to ADCP depth range
restrict to top 200m.
Notes:
Filtering out MLD makes a big difference!
Since we’re working with derivatives, does the vertical grid matter (as long as it is coarser than observations)?
1 difvho ocean_vertical_heat_diffusivity
2 difvso ocean_vertical_salt_diffusivity
Need lateral / neutral terms for total χ, ε
Why can’t I reproduce the ε figure?
Why can’t I save the new station data?
References#
Warner & Moum (2019)
Setup#
# --- Notebook setup: IPython extensions, imports, plotting and dask defaults ---
%load_ext watermark
import datetime
import glob
import os
import warnings
import pandas as pd
import cf_xarray
import dask
import dcpy
import distributed
import flox.xarray
import holoviews as hv
import hvplot.xarray
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import xarray as xr
import xgcm
import pump  # project-local package: model/obs readers and calculations
from pump import mixpods  # χpod / mixing analysis helpers

hv.notebook_extension('bokeh')  # render holoviews/hvplot with the bokeh backend
# Don't let dask split large chunks when slicing (keeps task graphs smaller).
dask.config.set({"array.slicing.split_large_chunks": False})
plt.style.use("bmh")
plt.rcParams["figure.dpi"] = 140
# Preserve variable attrs through operations; collapse data in HTML reprs.
xr.set_options(keep_attrs=True, display_expand_data=False)
# NOTE(review): inline comment says "MITgcm output directory" — this is the
# 1/20° TPOS run output root; confirm it is the intended station source.
gcmdir = "/glade/campaign/cgd/oce/people/bachman/TPOS_1_20_20_year/OUTPUT/" # MITgcm output directory
stationdirname = gcmdir
%watermark -iv
The watermark extension is already loaded. To reload it, use:
%reload_ext watermark
pump : 0.1
distributed : 2022.7.0
numpy : 1.22.4
ncar_jobqueue: 2021.4.14
cf_xarray : 0.7.4
dask_jobqueue: 0.7.3
dask : 2022.7.0
pandas : 1.4.3
ipywidgets : 7.7.1
flox : 0.5.9
json : 2.0.9
holoviews : 1.14.9
xgcm : 0.6.0
hvplot : 0.8.0
xarray : 2022.6.0rc0
sys : 3.10.5 | packaged by conda-forge | (main, Jun 14 2022, 07:06:46) [GCC 10.3.0]
dcpy : 0.1.dev385+g121534c
matplotlib : 3.5.2
import ncar_jobqueue

# Tear down any client/cluster left over from a previous execution of this cell.
if "client" in locals():
    client.close()
    del client
if "cluster" in locals():
    cluster.close()
#env = {"OMP_NUM_THREADS": "3", "NUMBA_NUM_THREADS": "3"}
# cluster = distributed.LocalCluster(
#     n_workers=8,
#     threads_per_worker=1,
#     env=env
# )
if "cluster" in locals():
    del cluster
#cluster = ncar_jobqueue.NCARCluster(
#    project="NCGD0011",
#    scheduler_options=dict(dashboard_address=":9797"),
#)
# cluster = dask_jobqueue.PBSCluster(
#     cores=9, processes=9, memory="108GB", walltime="02:00:00", project="NCGD0043",
#     env_extra=env,
# )
import dask_jobqueue

# Single-core, single-process PBS workers on the casper queue.
cluster = dask_jobqueue.PBSCluster(
    cores=1,  # The number of cores you want
    memory='23GB',  # Amount of memory
    processes=1,  # How many processes
    queue='casper',  # The type of queue to utilize (/glade/u/apps/dav/opt/usr/bin/execcasper)
    local_directory='$TMPDIR',  # Use your local directory
    resource_spec='select=1:ncpus=1:mem=23GB',  # Specify resources
    project='ncgd0011',  # Input your project ID here
    walltime='02:00:00',  # Amount of wall time
    interface='ib0',  # Interface to use
)
# Adaptive scaling between 2 and 12 jobs; wait_count=300 delays scale-down
# so workers are not killed during short idle periods.
cluster.adapt(minimum_jobs=2, maximum_jobs=12, wait_count=300)
<distributed.deploy.adaptive.Adaptive at 0x2b3c167bfa90>
Read data#
TAO#
# Seasonal percentiles of hourly TAO Richardson number; guess CF axis attrs.
tao_Ri = xr.load_dataarray("tao-hourly-Ri-seasonal-percentiles.nc").cf.guess_coord_axis()
# (removed) stray `chipod.dd` expression: `chipod` is only defined in the next
# cell, so evaluating it here raises NameError, and `.dd` is not referenced
# anywhere else in this notebook.
# Gridded TAO data at 0, 140°W, merged with OSU χpod turbulence estimates.
tao_gridded = (
    xr.open_dataset(
        os.path.expanduser("~/work/pump/zarrs/tao-gridded-ancillary.zarr"), chunks="auto", engine="zarr"
    )
    .sel(longitude=-140, time=slice("1996", None))
)
tao_gridded["depth"].attrs["axis"] = "Z"
tao_gridded["shallowest"].attrs.clear()
chipod = (
    dcpy.oceans.read_cchdo_chipod_file("~/datasets/microstructure/osu/chipods_0_140W.nc")
    .sel(time=slice("2015"))
    # move from time on the half hour to on the hour
    .coarsen(time=2, boundary="trim").mean()
    # "Only χpods between 29 and 69 m are used in this analysis as
    # deeper χpods are more strongly influenced by the variability of zEUC than by surface forcing."
    # - Warner and Moum (2019)
    .sel(depth=slice(29, 69))
    # align to the gridded TAO time axis (nearest sample within 5 minutes)
    .reindex(time=tao_gridded.time, method="nearest", tolerance="5min")
    .pipe(mixpods.normalize_z, sort=True)
)
# Attach χpod-derived variables on their own vertical coordinate ("depthchi")
# so they don't get interpolated onto the TAO depth grid.
tao_gridded = (
    tao_gridded
    .update(
        chipod[["chi", "KT", "eps", "Jq"]]
        .reset_coords(drop=True)
        .rename({"depth": "depthchi"})
    )
)
# Derive stability/turbulence diagnostics and ENSO-phase masks, then the
# joint N²–S² PDFs (excluding the ancillary variables), and load to memory.
tao_gridded = mixpods.prepare(tao_gridded, oni=pump.obs.process_oni())
tao_gridded = tao_gridded.update(
    mixpods.pdf_N2S2(tao_gridded.drop_vars(["shallowest", "zeuc"]))
)
tao_gridded = mixpods.load(tao_gridded)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:245: UserWarning: The specified Dask chunks separate the stored chunks along dimension "depth" starting at index 58. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:245: UserWarning: The specified Dask chunks separate the stored chunks along dimension "time" starting at index 139586. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/core/dataset.py:245: UserWarning: The specified Dask chunks separate the stored chunks along dimension "longitude" starting at index 2. This could degrade performance. Instead, consider rechunking after loading.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: divide by zero encountered in true_divide
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: invalid value encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: divide by zero encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: divide by zero encountered in true_divide
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: invalid value encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: invalid value encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: invalid value encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: divide by zero encountered in true_divide
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/flox/aggregations.py:258: RuntimeWarning: invalid value encountered in true_divide
finalize=lambda sum_, count: sum_ / count,
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/core.py:119: RuntimeWarning: divide by zero encountered in log10
return func(*(_execute_task(a, cache) for a in args))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
# Distribution and monthly-mean depth-time view of TAO ε (log10).
np.log10(tao_gridded.eps).hvplot.hist(bins=101)
np.log10(tao_gridded.eps).resample(time="M").mean().hvplot.quadmesh(clim=(-7.5, -6))
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/flox/aggregations.py:258: RuntimeWarning: invalid value encountered in true_divide
finalize=lambda sum_, count: sum_ / count,
Possible reasons for difference
My MLD seems deeper even though I use the same ΔT=0.15 threshold. It could be that they’ve used Akima splines to interpolate in the vertical
I’ve filtered to DCL, so accounting for MLD and EUC movement. I’m not sure they did that.
Only 𝜒pods between 29 and 69 m are used in this analysis as deeper 𝜒pods are more strongly influenced by the variability of zEUC than by surface forcing.
# Mean log10(ε) binned by Richardson number: La-Niña cool vs El-Niño warm.
da = np.log10(tao_gridded.eps_ri)
#da["Rig_T_bins"] = pd.Index(
#    [pd.Interval(np.log10(i.left), np.log10(i.right))
#     for i in tao_gridded.Rig_T_bins.data]
#)
(
    da
    .sel(stat="mean")
    .sel(enso_transition_phase=["La-Nina cool", "El-Nino warm"])
    .plot(hue="enso_transition_phase", ylim=(-7.6, -5.9), marker='.')
)
ax = plt.gca()
ticks = [0.04, 0.1, 0.25, 0.63, 1.6]
ax.set_yticks([-7.25, -7, -6.5, -6])
ax.set_xticks(np.log10(ticks))
# BUG FIX: was f"{a:2f}" — "2" is a *width* spec, so labels rendered with the
# default six decimal places ("0.040000"). ".2f" gives the intended "0.04".
ax.set_xticklabels([f"{a:.2f}" for a in ticks])
dcpy.plots.linex(np.log10(0.25))  # mark critical Ri = 1/4 on the log10 axis
# Same Ri-binned mean ε comparison, without the custom tick labels.
(
    np.log10(tao_gridded.eps_ri)
    .sel(stat="mean")
    .sel(enso_transition_phase=["La-Nina cool", "El-Nino warm"])
    .plot(hue="enso_transition_phase", ylim=(-7.5, -6))
)
# NOTE(review): the previous cell marks Ri=1/4 at np.log10(0.25); here the
# vertical line is drawn at 0.25 directly. Confirm whether this x-axis is
# linear in Ri — otherwise this should also be np.log10(0.25).
dcpy.plots.linex(0.25)
# Spot-check MLD estimates against temperature structure for one month.
sub = tao_gridded.sel(time="2010-01")
t = sub.theta.hvplot.quadmesh(cmap="turbo_r")
# ΔT relative to the near-surface (0 m, -5 m) maximum; ±0.15 matches the
# ΔT=0.15 MLD threshold discussed above.
dt = (sub.theta - sub.theta.reset_coords(drop=True).cf.sel(Z=[0, -5]).cf.max("Z")).hvplot.quadmesh(clim=(-0.15, 0.15), cmap="RdBu_r")
newmld = mixpods.get_mld_tao_theta(sub.theta.reset_coords(drop=True))
# Overlay stored MLD (white) and freshly recomputed MLD (orange) on ΔT.
(dt
 * sub.reset_coords().mldT.hvplot.line(color='w', line_width=2)
 * newmld.reset_coords(drop=True).hvplot.line(color='orange', line_width=1)
).opts(frame_width=1200)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
cluster  # display the dask cluster widget
# Compare 5-day means of stored vs recomputed MLD over the full record.
(
    tao_gridded.reset_coords().mldT.resample(time="5D").mean().hvplot.line()
    * mixpods.get_mld_tao_theta((tao_gridded.reset_coords().theta)).resample(time="5D").mean().hvplot.line()
)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/flox/aggregate_flox.py:105: RuntimeWarning: invalid value encountered in true_divide
out /= nanlen(group_idx, array, size=size, axis=axis, fill_value=0)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/reductions.py:608: RuntimeWarning: All-NaN slice encountered
return np.nanmax(x_chunk, axis=axis, keepdims=keepdims)
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/flox/aggregations.py:258: RuntimeWarning: invalid value encountered in true_divide
finalize=lambda sum_, count: sum_ / count,
# Zonal velocity section and EUC-maximum-depth time series for TAO.
tao_gridded.u.cf.plot()
tao_gridded.eucmax.plot()
[<matplotlib.lines.Line2D at 0x2b38c6a4a8c0>]
MITgcm stations#
# Read 1/20° MITgcm station output and build the 0°, 140°W analysis dataset.
stations = pump.model.read_stations_20(stationdirname)
# Nearest model columns to four TAO mooring longitudes.
gcmeq = stations.sel(longitude=[-155.025, -140.025, -125.025, -110.025], method="nearest")
#enso = pump.obs.make_enso_mask()
#mitgcm["enso"] = enso.reindex(time=mitgcm.time.data, method="nearest")
#gcmeq["eucmax"] = pump.calc.get_euc_max(gcmeq.u)
# pump.calc.calc_reduced_shear(gcmeq)
#oni = pump.obs.process_oni()
#gcmeq["enso_transition"] = mixpods.make_enso_transition_mask(oni).reindex(time=gcmeq.time.data, method="nearest")
mitgcm = gcmeq.sel(longitude=-140.025, method="nearest")
# Vertical grid metrics for xgcm; normalize depth to a sorted, negative-up axis.
metrics = mixpods.normalize_z(pump.model.read_metrics(stationdirname), sort=True)
mitgcm = mixpods.normalize_z(mitgcm, sort=True)
mitgcm_grid = xgcm.Grid(
    metrics.sel(latitude=mitgcm.latitude, longitude=mitgcm.longitude, method="nearest"),
    coords=({"Z": {"center": "depth", "outer": "RF"}, "Y": {"center": "latitude"}}),
    metrics={"Z": ("drF", "drC")},
    periodic=False,
    boundary="fill",
    fill_value=np.nan,
)
# Standard names let mixpods.prepare locate these variables via cf-xarray.
mitgcm.theta.attrs["standard_name"] = "sea_water_potential_temperature"
mitgcm["VISrI_Um"].attrs["standard_name"] = "ocean_vertical_x_viscosity"
mitgcm["VISrI_Vm"].attrs["standard_name"] = "ocean_vertical_y_viscosity"
# Derive diagnostics, keep the equatorial column and the top 250 m only.
mitgcm = mixpods.prepare(mitgcm, grid=mitgcm_grid, oni=pump.obs.process_oni()).sel(latitude=0, method="nearest").cf.sel(Z=slice(-250, 0))
mitgcm = mitgcm.update(mixpods.pdf_N2S2(mitgcm))
mitgcm = mixpods.load(mitgcm)
mitgcm
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/dask/array/core.py:4700: PerformanceWarning: Increasing number of chunks by factor of 37
result = blockwise(
<xarray.Dataset>
Dimensions: (depth: 100, time: 174000, RF: 101, N2T_bins: 29,
S2_bins: 29, enso_transition_phase: 7,
Rig_T_bins: 20, enso_2phase: 3, stat: 2)
Coordinates: (12/18)
* depth (depth) float32 -248.8 -246.2 -243.8 ... -3.75 -1.25
latitude float64 0.025
longitude float64 -140.0
* time (time) datetime64[ns] 1998-12-31T18:00:00 ... 2018...
* RF (RF) float64 -250.0 -247.5 -245.0 ... -5.0 -2.5 0.0
eucmax (time) float32 dask.array<chunksize=(162,), meta=np.ndarray>
... ...
* S2_bins (S2_bins) object (-5.0, -4.896551724137931] ... (-...
* enso_transition_phase (enso_transition_phase) object 'none' ... 'all'
bin_areas (N2T_bins, S2_bins) float64 0.0107 0.0107 ... 0.0107
* Rig_T_bins (Rig_T_bins) object (0.05, 0.1475] ... (1.9025, 2.0]
* enso_2phase (enso_2phase) object 'El-Nino' 'La-Nina' 'none'
* stat (stat) <U4 'mean' 'std'
Data variables: (12/29)
DFrI_TH (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPdiffKzT (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPg_TH (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
KPPhbl (time) float32 dask.array<chunksize=(6000,), meta=np.ndarray>
KPPviscAz (depth, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
SSH (time) float32 dask.array<chunksize=(6000,), meta=np.ndarray>
... ...
shred2 (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
Rig_T (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
eps (RF, time) float32 dask.array<chunksize=(100, 6000), meta=np.ndarray>
n2s2pdf (N2T_bins, S2_bins, enso_transition_phase) float64 ...
eps_n2s2 (N2T_bins, S2_bins, enso_transition_phase) float64 ...
eps_ri (stat, Rig_T_bins, enso_2phase) float64 0.3784 ......
Attributes:
easting: longitude
northing: latitude
title: Station profile, index (i,j)=(1201,240)
mitgcm.u.cf.plot()
# MITgcm MLD and EUC-maximum-depth time series.
mitgcm.mldT.reset_coords(drop=True).cf.plot()
mitgcm.eucmax.reset_coords(drop=True).cf.plot()
KeyboardInterrupt
# Joint N²–S² PDF for the MITgcm run (all times, no ENSO partition).
mixpods.plot_n2s2pdf(mitgcm.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2b65e58af010>
MOM6 run : calculate ONI#
# MOM6 baseline run: read the static grid file and the surface history files.
dirname = "/glade/scratch/dcherian/archive/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/ocn/hist"
# NOTE(review): star-unpacking assumes exactly one *static* file matches the
# glob; extra matches become extra positional args and would error — verify.
static = xr.open_dataset(*glob.glob(f"{dirname}/*static*.nc"))
sfc = xr.open_mfdataset(
    sorted(glob.glob(f"{dirname}/*sfc*")),
    coords="minimal",
    data_vars="minimal",
    compat="override",
    use_cftime=True,
    parallel=True,
)
# Shift model (noleap) times forward by 1957 × 365-day years so the record
# lines up with calendar years (~1958 onward).
sfc["time"] = sfc.time + datetime.timedelta(days=365*1957)
#sfc["time"] = sfc.time + xr.coding.cftime_offsets.YearBegin(1957)
sfc.coords.update(static.drop("time"))
#sfc["tos"].attrs["coordinates"] = "geolon geolat"
sst = sfc.cf["sea_surface_temperature"]
sst.cf
Coordinates:
- CF Axes: * X: ['xh']
* Y: ['yh']
* T: ['time']
Z: n/a
- CF Coordinates: * longitude: ['xh']
* latitude: ['yh']
* time: ['time']
vertical: n/a
- Cell Measures: area: ['areacello']
volume: n/a
- Standard Names: cell_area: ['areacello']
- Bounds: n/a
# Calculate a monthly average sea surface temperature in the Nino 3.4 region (5°S-5°N, 170°W-120°W).
monthly_ = (
    sst.cf.sel(latitude=slice(-5, 5), longitude=slice(-170, -120))
    .cf.mean(["X", "Y"])
    .resample(time="M")
    .mean()
    #.sel(time=slice("1960", "1999-12-31"))
    #.reindex(
    #    time=xr.cftime_range("1955-01-01", monthly.time[-1].dt.strftime("%Y-%m-%d").data.item(), freq="MS")
    #)
    .load()
)
# Reindex onto a complete month-end axis starting 1956 (preserving the model's
# calendar), then convert to a gregorian datetime64 axis for comparison
# against observations.
monthly = (
    monthly_
    .reindex(
        time=xr.cftime_range(
            "1956-01-01",
            monthly_.time[-1].dt.strftime("%Y-%m-%d").data.item(),
            freq="M",
            calendar=monthly_.time.dt.calendar,
        )
    )
    .convert_calendar("gregorian", use_cftime=False)
)
#monthly = monthly_
monthly.plot()
[<matplotlib.lines.Line2D at 0x2b740fc07280>]
# NOTE(review): `oniobs` and `onimom6` are not defined anywhere in this
# notebook as captured — presumably computed in a cell not shown; verify.
(
    oniobs.hvplot.line(x="time", label="obs")
    * onimom6.hvplot.line(x="time", label="MOM6")
).opts(ylabel="ONI [°C]")
# ENSO transition-phase masks, observed vs MOM6 (red).
oniobs.enso_transition_mask.plot()
onimom6.enso_transition_mask.plot(color='r')
[<matplotlib.lines.Line2D at 0x2b6d7fd20eb0>]
MOM6 sections#
import mom6_tools
import xgcm
from mom6_tools.sections import combine_nested, read_raw_files
from mom6_tools.wright_eos import wright_eos
def combine_variables_by_coords(dsets):
    """Merge a list of per-file Datasets into one Dataset, variable by variable.

    For each variable name appearing in any input, the corresponding arrays are
    combined across files with ``xr.combine_by_coords`` and the per-variable
    results merged into a single Dataset. Datasets with an empty ``time``
    dimension are skipped.

    Parameters
    ----------
    dsets : list of xr.Dataset
        Datasets read from individual (possibly distributed) output files.

    Returns
    -------
    xr.Dataset
    """
    import itertools

    import tqdm

    # Union of variable names across all inputs.
    all_vars = set(itertools.chain(*[ds.data_vars for ds in dsets]))
    merged = xr.merge(
        xr.combine_by_coords(
            # BUG FIX: `all_vars` is a *union*, so a given dataset may not carry
            # every variable — guard with `var in ds` to avoid a KeyError.
            [ds[var] for ds in dsets if ds.sizes["time"] > 0 and var in ds],
            combine_attrs="override",
        )
        for var in tqdm.tqdm(all_vars)
    )
    return merged
import glob

# Read the per-processor TAO 140°W section files from the run directory and
# combine them into a single dataset.
# dirname = "/glade/scratch/gmarques/gmom.e23.GJRAv3.TL319_t061_zstar_N65.mixpods.001/run"
dirname = "/glade/scratch/dcherian/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/run/"
files = sorted(glob.glob(f"{dirname}/*TAO*140W*.nc.*"))
lons = [140]  # [90, 110, 140, 155, 170] # TODO: Add 125
dsets = read_raw_files(files, parallel=True)
combined = combine_variables_by_coords(dsets)
#mom6tao = read_tao_sections(dirname)
100%|██████████| 21/21 [02:36<00:00, 7.44s/it]
# Shift model noleap times forward by 1957 × 365-day years, matching the
# shift applied to the surface (sfc) files above.
combined["time"] = combined["time"] + datetime.timedelta(days=1957*365)
def interp_to_center(ds):
    """Average paired q-point (velocity) columns onto the tracer (xh) point.

    The section files carry u on xq points flanking each mooring longitude;
    each adjacent pair is averaged onto the nearest xh column.
    """
    import toolz as tlz

    # NOTE(review): ix0/ix1/indices compute interleaved q-point pairs for *all*
    # sections but are unused below — the isel is hard-coded to the
    # single-section case [1, 2] (cf. the commented multi-section variant).
    # Dead code; either use `indices` or remove this.
    ix0 = np.arange(1, ds.sizes["xq"], 4)
    ix1 = ix0 + 1
    indices = list(tlz.interleave([ix0, ix1]))
    # print(ds.xq.data[indices])
    out = (
        ds
        #.isel(xq=[1, 2, 5, 6, 8, 9, 12, 13, 16, 17])
        .isel(xq=[1, 2])
        .coarsen(xq=2).mean()
    )
    # Match the coarsened xq coordinate to the nearest xh value (within 0.05°).
    out = out.sel(xh=out.xq.data, method="nearest", tolerance=0.05)
    # Rename remaining xq-dimensioned variables onto xh.
    for var in out:
        if "xq" in out[var].dims:
            out[var] = out[var].rename({"xq": "xh"}).drop("xh")
    return out.drop_vars("xq")
# Center the section on xh, convert to datetime64 times, and add density fields.
mom6tao = interp_to_center(combined).cf.chunk({"Y": -1})
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
# Wright equation of state at p=0: full potential density ...
mom6tao["dens"] = wright_eos(mom6tao.thetao, mom6tao.so, 0)
mom6tao["dens"].attrs.update(
    {"units": "kg/m^3", "standard_name": "sea_water_potential_density"}
)
# ... and a temperature-only density (salinity held fixed at 35).
mom6tao["densT"] = wright_eos(mom6tao.thetao, 35, 0)
mom6tao["densT"].attrs.update({"standard_name": "sea_water_potential_density", "units": "kg/m3"})
mom6tao
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xarray/coding/times.py:360: FutureWarning: Index.ravel returning ndarray is deprecated; in a future version this will return a view on self.
sample = dates.ravel()[0]
/glade/scratch/dcherian/tmp/ipykernel_272468/768503968.py:25: RuntimeWarning: Converting a CFTimeIndex with dates from a non-standard calendar, 'noleap', to a pandas.DatetimeIndex, which uses dates from the standard calendar. This may lead to subtle errors in operations that depend on the length of time between dates.
mom6tao["time"] = mom6tao.indexes["time"].to_datetimeindex()
<xarray.Dataset>
Dimensions: (yh: 49, time: 506640, xh: 1, zi: 66, nv: 2, zl: 65,
yq: 49)
Coordinates:
* yh (yh) float64 -5.911 -5.664 -5.417 ... 5.541 5.788 6.035
* time (time) datetime64[ns] 1958-01-06T00:30:00 ... 2015-12-3...
* xh (xh) float64 -140.0
* zi (zi) float64 0.0 2.5 5.0 7.5 ... 5.503e+03 5.751e+03 6e+03
* nv (nv) float64 1.0 2.0
* zl (zl) float64 1.25 3.75 6.25 ... 5.627e+03 5.876e+03
* yq (yq) float64 -6.035 -5.788 -5.541 ... 5.417 5.664 5.911
Data variables: (12/23)
Kd_heat (time, zi, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
time_bnds (time, nv) timedelta64[ns] dask.array<chunksize=(8640, 2), meta=np.ndarray>
vo (time, zl, yq, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
tauy (time, yq, xh) float32 dask.array<chunksize=(8640, 49, 1), meta=np.ndarray>
Tflx_dia_diff (time, zi, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
taux (time, yh, xh) float32 dask.array<chunksize=(8640, 49, 1), meta=np.ndarray>
... ...
volcello (time, zl, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
uo (time, zl, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
thetao (time, zl, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
SW (time, yh, xh) float32 dask.array<chunksize=(8640, 49, 1), meta=np.ndarray>
dens (time, zl, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
densT (time, zl, yh, xh) float32 dask.array<chunksize=(8640, 40, 49, 1), meta=np.ndarray>
Save#
# Persist the combined section to zarr (drop time-averaging bookkeeping vars),
# rechunked to one-year (hourly) time chunks.
mom6tao.drop_vars(["average_DT", "average_T2", "average_T1", "time_bnds"]).chunk({"time": 24*365}).to_zarr(
    f"/glade/scratch/dcherian/archive/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/ocn/moorings/tao.zarr",
    mode="w",
    consolidated=True,
)
<xarray.backends.zarr.ZarrStore at 0x2b746511da10>
Read sections#
# Re-open the saved section zarr (fast path; avoids re-reading the raw files).
mom6tao = (
    xr.open_dataset(
        "/glade/scratch/dcherian/archive/gmom.e23.GJRAv3.TL319_t061_zstar_N65.baseline.001.mixpods/ocn/moorings/tao.zarr",
        engine="zarr",
        chunks="auto"
    )
)
# Unfortunately have to do this here so that the Grid is right.
mom6tao = mixpods.normalize_z(mom6tao, sort=True)
if "Kv_v" not in mom6tao:
    warnings.warn("Kv_v not present. Assuming equal to Kv_u")
    # Reuse the u-viscosity values; dims taken from `vo` so Kv_v lives on yq.
    mom6tao["Kv_v"] = (mom6tao["vo"].dims, mom6tao.Kv_u.data)
# Standard names so downstream cf-xarray lookups find the viscosities.
mom6tao.Kv_u.attrs["standard_name"] = "ocean_vertical_x_viscosity"
mom6tao.Kv_v.attrs["standard_name"] = "ocean_vertical_y_viscosity"
/glade/scratch/dcherian/tmp/ipykernel_272468/784068456.py:12: UserWarning: Kv_v not present. Assuming equal to Kv_u
warnings.warn("Kv_v not present. Assuming equal to Kv_u")
# xgcm grid for the MOM6 section: zi interfaces / zl centers, with layer
# thickness `h` as the vertical metric; non-periodic, NaN-filled boundaries.
grid = xgcm.Grid(
    mom6tao,
    coords={"Z": {"outer": "zi", "center": "zl"},
            "X": {"center": "xh"},
            "Y": {"center": "yh", "left": "yq"},
            },
    periodic=False,
    boundary="fill",
    fill_value=np.nan,
    metrics={("Z",): "h"},
)
grid
<xgcm.Grid>
Z Axis (not periodic, boundary='fill'):
* outer zi --> center
* center zl --> outer
X Axis (not periodic, boundary='fill'):
* center xh
Y Axis (not periodic, boundary='fill'):
* center yh --> left
* left yq --> center
# Derive stability/turbulence diagnostics; `monthly` supplies the model ONI.
mom6tao = mixpods.prepare(mom6tao, grid, monthly)
mom6tao
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/cf_xarray/accessor.py:1638: UserWarning: Variables {'areacello'} not found in object but are referred to in the CF attributes.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yh', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
/glade/u/home/dcherian/miniconda3/envs/pump/lib/python3.10/site-packages/xgcm/grid.py:1471: UserWarning: Metric at ('time', 'zi', 'yq', 'xh') being interpolated from metrics at dimensions ('time', 'zl', 'yh', 'xh'). Boundary value set to 'extend'.
warnings.warn(
<xarray.Dataset>
Dimensions: (time: 506640, yh: 49, xh: 1, zi: 66, zl: 65, nv: 2,
yq: 49)
Coordinates: (12/13)
* nv (nv) float64 1.0 2.0
* time (time) datetime64[ns] 1958-01-06T00:30:00 ... 2015-12-3...
* xh (xh) float64 -140.0
* yh (yh) float64 -5.911 -5.664 -5.417 ... 5.541 5.788 6.035
* yq (yq) float64 -6.035 -5.788 -5.541 ... 5.417 5.664 5.911
* zi (zi) float64 -6e+03 -5.751e+03 -5.503e+03 ... -2.5 -0.0
... ...
eucmax (time, xh) float64 dask.array<chunksize=(8760, 1), meta=np.ndarray>
mldT (time, yh, xh) float64 dask.array<chunksize=(8760, 49, 1), meta=np.ndarray>
oni (time) float32 nan nan nan nan nan ... nan nan nan nan nan
warm_mask (time) bool True True True True ... True True True True
cool_mask (time) bool False False False False ... False False False
enso_transition (time) <U12 '____________' ... '____________'
Data variables: (12/25)
KPP_OBLdepth (time, yh, xh) float32 dask.array<chunksize=(506640, 49, 1), meta=np.ndarray>
KPP_buoyFlux (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 26, 49, 1), meta=np.ndarray>
KPP_kheat (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 26, 49, 1), meta=np.ndarray>
Kd_heat (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 26, 49, 1), meta=np.ndarray>
Kv_u (time, zl, yh, xh) float32 dask.array<chunksize=(8760, 25, 49, 1), meta=np.ndarray>
SW (time, yh, xh) float32 dask.array<chunksize=(506640, 49, 1), meta=np.ndarray>
... ...
Kv_v (time, zl, yq, xh) float32 dask.array<chunksize=(8760, 25, 49, 1), meta=np.ndarray>
N2T (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 1, 49, 1), meta=np.ndarray>
S2 (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 1, 1, 1), meta=np.ndarray>
shred2 (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 1, 1, 1), meta=np.ndarray>
Rig_T (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 1, 1, 1), meta=np.ndarray>
eps (time, zi, yh, xh) float32 dask.array<chunksize=(8760, 1, 1, 1), meta=np.ndarray>
mom6140 = mom6tao.cf.sel(longitude=-140, latitude=0, method="nearest")
# Restrict the 0°, 140°W column to the top 250 m, compute N²–S² PDFs, load.
mom6140 = mom6140.cf.sel(Z=slice(-250, 0))
mom6140 = mom6140.update(mixpods.pdf_N2S2(mom6140))
mom6140
mom6140 = mixpods.load(mom6140)
# MOM6 zonal velocity with EUC depth and MLD overlaid; then the N²–S² PDF.
mom6140.uo.cf.plot(robust=True)
mom6140.eucmax.cf.plot()
mom6140.mldT.cf.plot(lw=0.5, color='k')
mixpods.plot_n2s2pdf(mom6140.n2s2pdf.sel(enso_transition_phase="none"))
<matplotlib.contour.QuadContourSet at 0x2acdb4c75e10>
Pick simulations#
# Compare the vertical sampling of zonal velocity across the three datasets.
datasets = {"TAO": tao_gridded, "MITgcm": mitgcm, "MOM6": mom6140}
for name, ds in datasets.items():
    # Plot each dataset's Z coordinate as isolated points, labelled by source.
    depths = ds.cf["sea_water_x_velocity"].cf["Z"]
    depths.plot(marker='.', ls="none", label=name)
plt.legend()
<matplotlib.legend.Legend at 0x2b85772a3f70>
Compare EUC maximum and MLD#
# MLD time series for all three datasets, with TAO as the observational reference.
mixpods.plot_timeseries(datasets, "mldT", obs="TAO")